// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>
static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");
static struct workqueue_struct *cryptd_wq;
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

/*
 * In all the tfm contexts below, refcnt must stay the first member:
 * cryptd_enqueue_request() casts the tfm context to refcount_t.
 */
struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_sync_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};
static void cryptd_queue_worker(struct work_struct *work);
static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}
static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out_put_cpu;

	queue_work_on(cpu, cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out_put_cpu;

	refcount_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}
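/*
 * Refcount convention (derived from the code above and the completion
 * paths below): the tfm context is zeroed on allocation, so refcnt
 * starts at 0 and stays "dead" until cryptd_alloc_*() sets it to 1 when
 * handing the tfm out. Only then does each queued request take an extra
 * reference, which the matching cryptd_*_complete() path drops.
 */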
/* Called in workqueue context; performs one real crypto operation (via
 * req->complete) and reschedules itself if there is more work to do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}
static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case the
	 * resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that's already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
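/*
 * For example (driver name illustrative): wrapping a driver named
 * "cbc-aes-aesni" yields an instance whose cra_driver_name is
 * "cryptd(cbc-aes-aesni)", whose cra_name is inherited from the wrapped
 * algorithm (e.g. "cbc(aes)"), and whose priority is bumped by 50 so
 * the async wrapper is preferred over the implementation it wraps.
 */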
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_sync_skcipher *child = ctx->child;

	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(child,
				       crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(child, key, keylen);
}
static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}
static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
				    int err)
{
	struct skcipher_request *req = skcipher_request_cast(base);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_sync_skcipher *child = ctx->child;
	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	skcipher_request_set_sync_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	err = crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);

	req->base.complete = rctx->complete;

out:
	cryptd_skcipher_complete(req, err);
}
static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}
static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = (struct crypto_sync_skcipher *)cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx));
	return 0;
}
static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_sync_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}
static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}
static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}
static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}
static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}
static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}
static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}
static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL |
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}
static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}
static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}
static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}
static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}
static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}
static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}
static struct cryptd_queue queue;
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	return -EINVAL;
}
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};
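/*
 * Minimal usage sketch (illustrative, not part of this file): the
 * template is normally instantiated through a name lookup, e.g.
 * (assuming a "sha256-generic" driver is available):
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("cryptd(sha256-generic)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	// ahash requests now complete asynchronously from the cryptd
 *	// workqueue instead of the caller's context
 *	crypto_free_ahash(tfm);
 */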
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
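/*
 * Usage sketch (illustrative): a SIMD cipher driver typically wraps an
 * internal-only implementation; "__cbc-aes-aesni" is an assumed driver
 * name, not something this file defines:
 *
 *	struct cryptd_skcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_skcipher("__cbc-aes-aesni",
 *				     CRYPTO_ALG_INTERNAL,
 *				     CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	// fast path may use cryptd_skcipher_child() directly;
 *	// slow path enqueues through the cryptd tfm
 *	cryptd_free_skcipher(ctfm);
 */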
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return &ctx->child->base;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
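/*
 * Usage sketch (illustrative, assumed internal driver name
 * "__ghash-pclmulqdqni"):
 *
 *	struct cryptd_ahash *chash;
 *
 *	chash = cryptd_alloc_ahash("__ghash-pclmulqdqni",
 *				   CRYPTO_ALG_INTERNAL,
 *				   CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(chash))
 *		return PTR_ERR(chash);
 *	// when SIMD is usable, drive cryptd_ahash_child() synchronously;
 *	// otherwise submit to the cryptd ahash
 *	cryptd_free_ahash(chash);
 */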
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
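/*
 * Usage sketch (illustrative, assumed internal driver name
 * "__generic-gcm-aes-aesni"):
 *
 *	struct cryptd_aead *caead;
 *
 *	caead = cryptd_alloc_aead("__generic-gcm-aes-aesni",
 *				  CRYPTO_ALG_INTERNAL,
 *				  CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(caead))
 *		return PTR_ERR(caead);
 *	// cryptd_aead_child() gives the wrapped tfm for the fast path
 *	cryptd_free_aead(caead);
 */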
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);

err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}
static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");