// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");
static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	/*
	 * Protected by disabling BH to allow enqueueing from softinterrupt and
	 * dequeuing from kworker (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	struct skcipher_request req;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	void *data;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	struct aead_request req;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out;

	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out;

	refcount_inc(refcnt);

out:
	local_bh_enable();

	return err;
}
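
/*
 * The enqueue path above and cryptd_queue_worker() below form a per-CPU
 * producer/consumer pair: callers enqueue with BH disabled on the local CPU
 * and kick cryptd_wq on that same CPU; the worker then pops one request and
 * completes it in process context. A minimal sketch of the resulting
 * caller-side flow (hypothetical names, for illustration only):
 *
 *	err = crypto_ahash_digest(async_req);	// lands in cryptd_*_enqueue()
 *	if (err == -EINPROGRESS || err == -EBUSY)
 *		wait_for_completion(&done);	// resumed from the kworker
 */
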
/*
 * Called in workqueue context; do one real crypto operation (via
 * req->complete) and reschedule itself if there is more work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 */
	local_bh_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);
	crypto_request_complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}
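
/*
 * Note: because the work item dequeues a single request per invocation and
 * then re-queues itself while cpu_queue->queue.qlen is non-zero, a long
 * backlog is drained one request per workqueue callback rather than in one
 * long-running loop, which keeps the cryptd worker from monopolising the CPU.
 */
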
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case the
	 * resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that's already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
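
/*
 * For illustration (names are examples, not taken from this file): wrapping
 * the shash driver "sha256-generic" yields an instance whose cra_driver_name
 * is "cryptd(sha256-generic)" while cra_name stays "sha256", and which is
 * registered at the wrapped algorithm's priority plus 50.
 */
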
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child,
				  crypto_skcipher_get_flags(parent) &
				  CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}
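
/*
 * The skcipher handlers below reuse the caller's request together with the
 * embedded subrequest to stash state across the workqueue round trip: on
 * enqueue the caller's completion/data are parked in subreq->base while
 * req->base points at the cryptd completion handler, and
 * cryptd_skcipher_prepare() restores them before the child cipher runs, so
 * the final completion is reported against the caller's original callback.
 */
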
static struct skcipher_request *cryptd_skcipher_prepare(
	struct skcipher_request *req, int err)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->req;
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *child;

	req->base.complete = subreq->base.complete;
	req->base.data = subreq->base.data;

	if (unlikely(err == -EINPROGRESS))
		return NULL;

	ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	child = ctx->child;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	return subreq;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err,
				     crypto_completion_t complete)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->req;
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	skcipher_request_complete(req, err);
	local_bh_enable();

	if (unlikely(err == -EINPROGRESS)) {
		subreq->base.complete = req->base.complete;
		subreq->base.data = req->base.data;
		req->base.complete = complete;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(void *data, int err)
{
	struct skcipher_request *req = data;
	struct skcipher_request *subreq;

	subreq = cryptd_skcipher_prepare(req, err);
	if (likely(subreq))
		err = crypto_skcipher_encrypt(subreq);

	cryptd_skcipher_complete(req, err, cryptd_skcipher_encrypt);
}

static void cryptd_skcipher_decrypt(void *data, int err)
{
	struct skcipher_request *req = data;
	struct skcipher_request *subreq;

	subreq = cryptd_skcipher_prepare(req, err);
	if (likely(subreq))
		err = crypto_skcipher_decrypt(subreq);

	cryptd_skcipher_complete(req, err, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->req;
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	subreq->base.complete = req->base.complete;
	subreq->base.data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx) +
		     crypto_skcipher_reqsize(cipher));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg_common *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg_common(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = alg->ivsize;
	inst->alg.chunksize = alg->chunksize;
	inst->alg.min_keysize = alg->min_keysize;
	inst->alg.max_keysize = alg->max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_ahash *tfm)
{
	struct ahash_instance *inst = ahash_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = ahash_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static int cryptd_hash_clone_tfm(struct crypto_ahash *ntfm,
				 struct crypto_ahash *tfm)
{
	struct cryptd_hash_ctx *nctx = crypto_ahash_ctx(ntfm);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_clone_shash(ctx->child);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	nctx->child = hash;
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	rctx->data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;

	return cryptd_enqueue_request(queue, &req->base);
}

static struct shash_desc *cryptd_hash_prepare(struct ahash_request *req,
					      int err)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	req->base.complete = rctx->complete;
	req->base.data = rctx->data;

	if (unlikely(err == -EINPROGRESS))
		return NULL;

	return &rctx->desc;
}

static void cryptd_hash_complete(struct ahash_request *req, int err,
				 crypto_completion_t complete)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	ahash_request_complete(req, err);
	local_bh_enable();

	if (err == -EINPROGRESS) {
		req->base.complete = complete;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(void *data, int err)
{
	struct ahash_request *req = data;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *child = ctx->child;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (unlikely(!desc))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

out:
	cryptd_hash_complete(req, err, cryptd_hash_init);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = shash_ahash_update(req, desc);

	cryptd_hash_complete(req, err, cryptd_hash_update);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = crypto_shash_final(desc, req->result);

	cryptd_hash_complete(req, err, cryptd_hash_final);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = shash_ahash_finup(req, desc);

	cryptd_hash_complete(req, err, cryptd_hash_finup);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(void *data, int err)
{
	struct ahash_request *req = data;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *child = ctx->child;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (unlikely(!desc))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

out:
	cryptd_hash_complete(req, err, cryptd_hash_digest);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}

static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL |
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.init_tfm = cryptd_hash_init_tfm;
	inst->alg.clone_tfm = cryptd_hash_clone_tfm;
	inst->alg.exit_tfm = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child, int err,
			      int (*crypt)(struct aead_request *req),
			      crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx;
	struct aead_request *subreq;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	subreq = &rctx->req;
	req->base.complete = subreq->base.complete;
	req->base.data = subreq->base.data;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				  NULL, NULL);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	err = crypt(subreq);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	aead_request_complete(req, err);
	local_bh_enable();

	if (err == -EINPROGRESS) {
		subreq->base.complete = req->base.complete;
		subreq->base.data = req->base.data;
		req->base.complete = compl;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(void *data, int err)
{
	struct aead_request *req = data;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *child;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	child = ctx->child;
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt,
			  cryptd_aead_encrypt);
}

static void cryptd_aead_decrypt(void *data, int err)
{
	struct aead_request *req = data;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *child;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	child = ctx->child;
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt,
			  cryptd_aead_decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
	struct aead_request *subreq = &rctx->req;

	subreq->base.complete = req->base.complete;
	subreq->base.data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, sizeof(struct cryptd_aead_request_ctx) +
		     crypto_aead_reqsize(cipher));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_LSKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	return -EINVAL;
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");