// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

static struct workqueue_struct *cryptd_wq;

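/*
 * Requests are queued per CPU: every possible CPU owns a crypto_queue plus a
 * work_struct, so submissions never contend across CPUs and each request is
 * completed on the CPU that submitted it.
 */
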
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

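/*
 * Every request context stores the caller's original completion callback:
 * the enqueue paths substitute cryptd's own handler, and the handlers
 * restore the saved callback before completing the request.
 */
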
static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

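/*
 * Queue on the submitting CPU's queue. By convention the first member of
 * every cryptd context structure is a refcount_t, which is what the
 * crypto_tfm_ctx() cast below relies on. Only transforms handed out by the
 * cryptd_alloc_*() helpers have a live refcount (set to 1 there); for those,
 * each request in flight takes an extra reference that the completion path
 * drops again, so the tfm cannot go away while work is still queued.
 */
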
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out_put_cpu;

	refcount_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}

/*
 * Called in workqueue context: perform one real unit of crypto work (via
 * req->complete) and reschedule itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

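/*
 * CRYPTO_ALG_INTERNAL marks algorithms that must not be selected by users
 * directly; if the wrapped algorithm carries the flag, the cryptd instance
 * inherits both the type bit and the mask bit.
 */
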
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

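/*
 * Instances are named "cryptd(<driver>)" and inherit the wrapped
 * algorithm's geometry; the +50 priority boost makes the async wrapper win
 * algorithm selection over the plain implementation once instantiated.
 */
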
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(child, key, keylen);
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

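/*
 * The real cipher work happens below, in workqueue context: a synchronous
 * subrequest is built on the stack against the (non-async) child transform,
 * so the inner operation finishes inline and needs no completion callback
 * of its own.
 */
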
static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

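/*
 * The ahash front end wraps a synchronous shash. The shash_desc carrying
 * the hash state lives in the per-request context (sized in
 * cryptd_hash_init_tfm() to include crypto_shash_descsize()), so every
 * queued request owns its own state.
 */
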
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_shash_alg(&ctx->spawn);
	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_shash;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL |
					CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
out_drop_shash:
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

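/*
 * The AEAD path builds no on-stack subrequest: cryptd_aead_init_tfm() sizes
 * the request context to cover the child's needs as well, so the worker can
 * simply retarget the original request at the child tfm and invoke the
 * child's encrypt/decrypt directly.
 */
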
static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};

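/*
 * The cryptd_alloc_*() helpers below hand out handles to "cryptd(...)"
 * instances. The cra_module check rejects the case where the crypto API
 * resolved the name to something other than a cryptd instance. The initial
 * refcount of 1 is the allocation's own reference; cryptd_*_queued() report
 * pending requests by comparing against it, and cryptd_free_*() only free
 * the tfm once all queued requests have drained.
 */
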
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

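/*
 * The workqueue is WQ_MEM_RECLAIM so queued crypto work can make progress
 * under memory pressure (cryptd can sit in the I/O path, e.g. under disk
 * encryption) and WQ_CPU_INTENSIVE so long-running requests do not block
 * other work items on the same CPU; max_active of 1 permits one in-flight
 * work item per CPU.
 */
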
static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");