/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

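/*
 * Cap on the number of requests sitting on each per-CPU queue.  Once a
 * queue is full, crypto_enqueue_request() fails with -EBUSY, and only
 * requests marked CRYPTO_TFM_REQ_MAY_BACKLOG are kept on the backlog.
 */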
#define CRYPTD_MAX_CPU_QLEN 1000

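/*
 * Each possible CPU gets its own request queue and work item: requests
 * are queued on the submitting CPU and drained by cryptd_queue_worker()
 * on kcrypto_wq, one request per work invocation.
 */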
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	atomic_t refcnt;
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

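/*
 * Each per-transform context above (cryptd_blkcipher_ctx, cryptd_hash_ctx,
 * cryptd_aead_ctx) starts with an atomic_t refcnt, so that
 * cryptd_enqueue_request() can treat crypto_tfm_ctx() of any cryptd
 * transform as a bare refcount without knowing the algorithm type.
 */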
static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

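/*
 * Queue a request on the current CPU's queue.  If the transform was
 * handed out through the cryptd_alloc_* API (refcnt already nonzero),
 * take an extra reference per queued request so that cryptd_free_*()
 * cannot free the transform while work is still pending.
 */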
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	atomic_t *refcnt;
	bool may_backlog;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);
	may_backlog = request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG;

	if (err == -EBUSY && !may_backlog)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}

/* Called in workqueue context: do one real piece of crypto work (via
 * req->complete) and reschedule itself if there is more work to do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

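/*
 * Propagate the CRYPTO_ALG_INTERNAL type/mask bits from the template
 * parameters, so that cryptd instances wrapping internal-only algorithms
 * (e.g. raw SIMD implementations) stay hidden from general crypto users.
 */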
static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

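/*
 * Called from the workqueue with err == 0, or with err == -EINPROGRESS
 * when the request has just left the backlog.  In the latter case the
 * actual cipher call is skipped and only the original completion is
 * notified that the request is now in flight.
 */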
static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_ablkcipher *tfm;
	struct blkcipher_desc desc;
	int refcnt;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	tfm = crypto_ablkcipher_reqtfm(req);
	ctx = crypto_ablkcipher_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(tfm);
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

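/*
 * The instance is allocated with 'head' bytes in front of the struct
 * crypto_instance and 'tail' bytes behind it, so callers can embed it
 * inside a larger wrapper (e.g. an ahash_instance) and append their
 * instance context.
 */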
static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

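/*
 * The shash_desc holding the hash state lives in the request context
 * (see cryptd_hash_init_tfm(), which adds crypto_shash_descsize() to the
 * reqsize), so every outstanding request carries its own state and
 * requests can be processed independently.
 */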
static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

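/*
 * Unlike init/update/final/finup/digest, export and import are not
 * queued: they only copy the shash partial state, so they run
 * synchronously in the caller's context.
 */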
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.halg.base.cra_flags = type;

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

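/*
 * As in the blkcipher path, -EINPROGRESS means the request has just left
 * the backlog: skip the crypt call and only propagate the notification.
 * The tfm, completion and refcount are all sampled before calling the
 * completion, since the completion may free the request.
 */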
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_blkcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type = crypto_skcipher_type(type);
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_tfm_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

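/*
 * Typical usage pattern, sketched for illustration only (the driver name
 * below is hypothetical): a SIMD hash driver wraps its internal shash in
 * a cryptd instance once per outer tfm and releases it on exit.
 *
 *	struct cryptd_ahash *cryptd_tfm;
 *
 *	cryptd_tfm = cryptd_alloc_ahash("__ghash-accel",
 *					CRYPTO_ALG_INTERNAL,
 *					CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(cryptd_tfm))
 *		return PTR_ERR(cryptd_tfm);
 *	...
 *	cryptd_free_ahash(cryptd_tfm);
 */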
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

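/*
 * Sketch of the AEAD usage pattern, illustrative only (the algorithm
 * name is hypothetical): an accelerated AEAD driver allocates the async
 * wrapper once per outer tfm, forwards requests to either the cryptd
 * instance or, when the SIMD unit is usable, directly to the child
 * returned by cryptd_aead_child(), and frees the wrapper on exit.
 *
 *	struct cryptd_aead *ctfm;
 *
 *	ctfm = cryptd_alloc_aead("__gcm-aes-accel", CRYPTO_ALG_INTERNAL,
 *				 CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	cryptd_free_aead(ctfm);
 */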
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");