/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

#define CRYPTD_MAX_CPU_QLEN 100

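/*
 * Request queueing is per-CPU: each possible CPU gets its own
 * crypto_queue plus a work item that drains it on the kcrypto_wq
 * workqueue. A request is queued on the CPU that submitted it and is
 * later completed from workqueue context on that same CPU.
 */
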
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

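/*
 * Queue a request on the current CPU's queue and kick that CPU's
 * worker. The caller gets back the crypto_enqueue_request() status:
 * typically -EINPROGRESS once queued, or -EBUSY if the queue is full.
 */
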
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);
	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
	put_cpu();

	return err;
}

/* Called in workqueue context: do one unit of real crypto work (via
 * req->complete) and reschedule itself if there is more work to do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

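/*
 * Fold the CRYPTO_ALG_INTERNAL bit from the template parameters into
 * the type/mask pair used for the inner algorithm lookup, so that
 * "internal" implementations (not meant for general users) can still
 * be wrapped by cryptd.
 */
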
static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct blkcipher_desc desc;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

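/*
 * All asynchronous entry points share the same pattern: stash the
 * caller's completion in the request context, substitute cryptd's own
 * completion (which does the real work from the worker), and enqueue
 * the request on the per-CPU queue.
 */
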
static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

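/*
 * Allocate an instance with optional headroom and trailing context
 * space. The returned pointer is to the whole allocation; the
 * crypto_instance itself lives 'head' bytes in:
 *
 *	p -> [ head bytes ][ struct crypto_instance ][ tail bytes ]
 */
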
static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_put_alg(alg);
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

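/*
 * All five ahash entry points (init/update/final/finup/digest) funnel
 * through cryptd_hash_enqueue() with a matching worker-side completion
 * that performs the real shash operation.
 */
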
static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.halg.base.cra_flags = type;

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_put_alg(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	crypto_completion_t compl;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_blkcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	}

	crypto_drop_spawn(&ctx->spawn);
	kfree(inst);
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

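/*
 * With the "cryptd" template registered, callers can instantiate an
 * asynchronous wrapper around any suitable algorithm by name. A minimal
 * sketch (illustrative only; "sha256" is just an example and error
 * handling is elided):
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("cryptd(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_ahash(tfm);
 */
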
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

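/*
 * Minimal usage sketch for the ablkcipher helpers above (illustrative
 * only; "cbc(aes)" is just an example name and error handling is
 * elided):
 *
 *	struct cryptd_ablkcipher *ctfm;
 *	struct crypto_blkcipher *child;
 *
 *	ctfm = cryptd_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	child = cryptd_ablkcipher_child(ctfm);
 *	...
 *	cryptd_free_ablkcipher(ctfm);
 */
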
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

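/*
 * Minimal usage sketch for the ahash helpers above (illustrative only;
 * "sha1" is just an example and error handling is elided):
 *
 *	struct cryptd_ahash *ctfm;
 *
 *	ctfm = cryptd_alloc_ahash("sha1", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	cryptd_free_ahash(ctfm);
 */
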
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

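/*
 * Minimal usage sketch for the AEAD helpers above (illustrative only;
 * "gcm(aes)" is just an example and error handling is elided):
 *
 *	struct cryptd_aead *ctfm;
 *
 *	ctfm = cryptd_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	cryptd_free_aead(ctfm);
 */
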
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");