// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>
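
/*
 * cryptd wraps a synchronous algorithm into an asynchronous one: requests
 * are placed on a per-CPU queue and processed from process context by a
 * dedicated workqueue, one request at a time.
 */
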
static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	atomic_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

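/*
 * The tfm contexts above each embed an atomic_t refcnt as their first
 * member, so cryptd_enqueue_request() can take a reference per in-flight
 * request via crypto_tfm_ctx() and the tfm is only freed once the last
 * queued request has completed.
 */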
static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

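/*
 * Queue a request on the current CPU's queue and kick that CPU's worker.
 * The context refcnt is bumped for every request that was actually queued
 * so the backing tfm stays alive until the request completes.
 */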
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, cryptd_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}

/* Called in workqueue context, do one real cryption work (via
 * req->complete) and reschedule itself if there is more work to do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

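/*
 * Propagate CRYPTO_ALG_INTERNAL from the template arguments so that
 * wrapping an internal-only algorithm (one not meant for general users)
 * yields an instance that is marked internal as well.
 */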
static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

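/*
 * The instance keeps the base algorithm's cra_name but gets a
 * "cryptd(...)" driver name and a priority 50 above the wrapped
 * algorithm, so once instantiated it wins lookups for the base name.
 */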
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;
	int err;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_sync_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent,
				  crypto_sync_skcipher_get_flags(child) &
				  CRYPTO_TFM_RES_MASK);
	return err;
}

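/*
 * Completion path: invoke the caller's original completion handler and,
 * unless the request is merely reporting -EINPROGRESS, drop the reference
 * taken at enqueue time, freeing the tfm when it was the last one.
 */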
static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

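/*
 * The encrypt/decrypt handlers below run from the workqueue. They build a
 * synchronous sub-request on the stack against the child cipher, run it
 * directly, and hand the result to the completion path above.
 */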
static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));

	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

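/*
 * Template instantiation for "cryptd(<skcipher>)": grab the underlying
 * skcipher, copy its geometry (IV size, key sizes, etc.) into the new
 * instance and wire up the asynchronous entry points defined above.
 */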
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	const char *name;
	u32 type;
	u32 mask;
	int err;

	type = 0;
	mask = CRYPTO_ALG_ASYNC;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_skcipher;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_drop_skcipher:
		crypto_drop_skcipher(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

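/*
 * The ahash wrapper keeps a synchronous shash as its child and stores the
 * shash_desc (plus its state, sized via crypto_shash_descsize()) in the
 * request context, so hashing state travels with each request.
 */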
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC |
		(alg->cra_flags & (CRYPTO_ALG_INTERNAL |
				   CRYPTO_ALG_OPTIONAL_KEY));

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(salg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

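/*
 * Common worker for both AEAD directions: redirect the request to the
 * child tfm, run the supplied encrypt/decrypt operation synchronously,
 * then complete the request and drop the enqueue-time reference.
 */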
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

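/*
 * The helpers below give kernel users a typed handle on a cryptd
 * instance. As an illustrative sketch (not taken from this file), a
 * caller could wrap a cipher like this, assuming "cbc(aes)" is available:
 *
 *	struct cryptd_skcipher *tfm;
 *
 *	tfm = cryptd_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...			// queue skcipher requests on &tfm->base
 *	cryptd_free_skcipher(tfm);
 */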
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

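/*
 * Module bring-up: the workqueue and the per-CPU queues must exist before
 * the "cryptd" template is registered. On exit the workqueue is destroyed
 * (flushing any pending work) before the queues are torn down.
 */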
static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);

err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");