crypto: algapi - fold crypto_init_spawn() into crypto_grab_spawn()
crypto/algapi.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	/* The alignmask must be a power of two minus one. */
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
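
/*
 * Illustrative sketch (not part of the original file): a minimal
 * crypto_alg definition that would pass crypto_check_alg() above.  All
 * names and values here are made-up examples; note that cra_alignmask
 * must be a power of two minus one and cra_priority non-negative.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= 16,
 *		.cra_alignmask		= 3,
 *		.cra_module		= THIS_MODULE,
 *	};
 */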

static void crypto_free_instance(struct crypto_instance *inst)
{
	if (!inst->alg.cra_type->free) {
		inst->tmpl->free(inst);
		return;
	}

	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that is depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
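
/*
 * Usage sketch (illustrative, not from the original file): a driver
 * module typically registers its algorithm from its init hook and
 * unregisters it on exit; "example_alg" refers to the hypothetical
 * definition sketched after crypto_check_alg() above.
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */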

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);
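
/*
 * Usage sketch (illustrative): a template module registers a
 * crypto_template whose ->create() callback builds instances on demand,
 * e.g. when something requests "example(cbc(aes))".  The template name
 * and create callback below are hypothetical.
 *
 *	static struct crypto_template example_tmpl = {
 *		.name	= "example",
 *		.create	= example_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	static int __init example_tmpl_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 */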

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
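
/*
 * Usage sketch (illustrative): with crypto_init_spawn() folded in here,
 * a template's ->create() callback grabs its spawn in a single step,
 * passing the still-unregistered instance so the spawn can be chained
 * onto inst->spawns.  Type-specific helpers such as
 * crypto_grab_skcipher() are thin wrappers around this function.  The
 * context layout below is hypothetical:
 *
 *	err = crypto_grab_spawn(&ctx->spawn, inst,
 *				crypto_attr_alg_name(tb[1]), 0, mask);
 *	if (err)
 *		goto err_free_inst;
 */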

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	if (!spawn->dead && !crypto_mod_get(alg)) {
		alg->cra_flags |= CRYPTO_ALG_DYING;
		alg = NULL;
	}
	up_read(&crypto_alg_sem);

	return alg ?: ERR_PTR(-EAGAIN);
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
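
/*
 * Usage sketch (illustrative): a template ->create() usually consumes
 * the rtattr parameter array with the helpers above; tb[1] holding the
 * first algorithm name follows the existing template convention, and
 * the error handling here is abbreviated.
 *
 *	const char *name;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER);
 *	if (err)
 *		return err;
 *
 *	name = crypto_attr_alg_name(tb[1]);
 *	if (IS_ERR(name))
 *		return PTR_ERR(name);
 */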

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
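
/*
 * Example (illustrative): for a template named "xts" wrapping an
 * algorithm with cra_name "aes" and cra_driver_name "aes-generic",
 * this sets the instance names to "xts(aes)" and "xts(aes-generic)"
 * respectively, returning -ENAMETOOLONG if either result would not
 * fit in CRYPTO_MAX_ALG_NAME bytes.
 */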

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
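
/*
 * Usage sketch (illustrative): hardware drivers commonly pair these
 * helpers to queue requests under a lock and service them from a
 * worker, completing any backlogged request with -EINPROGRESS once it
 * moves onto the hardware queue.  Everything named example_* below is
 * hypothetical:
 *
 *	spin_lock_bh(&example_dev->lock);
 *	err = crypto_enqueue_request(&example_dev->queue, &req->base);
 *	backlog = crypto_get_backlog(&example_dev->queue);
 *	async_req = crypto_dequeue_request(&example_dev->queue);
 *	spin_unlock_bh(&example_dev->lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */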

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	/*
	 * Increment the big-endian value a byte at a time from the end,
	 * stopping once the carry no longer propagates.
	 */
	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
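
/*
 * Example (illustrative): CTR-style code uses this to advance a
 * big-endian counter block, e.g. crypto_inc(ctrblk, AES_BLOCK_SIZE)
 * turns a 16-byte block ending in ...00 01 into ...00 02, taking the
 * 32-bit fast path above whenever the buffer is suitably aligned and
 * falling back to crypto_inc_byte() for any unaligned remainder.
 */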

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
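
/*
 * Usage note (illustrative): callers normally go through the
 * crypto_xor() and crypto_xor_cpy() inline wrappers from
 * <crypto/algapi.h>, which fall back to this out-of-line version
 * unless the length is a compile-time constant multiple of the word
 * size on a machine with efficient unaligned access;
 * crypto_xor(dst, src, len) computes dst ^= src byte-wise.
 */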

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
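
/*
 * Worked example (illustrative): with cra_ctxsize = 64, cra_alignmask
 * = 15 and crypto_tfm_ctx_alignment() = 8, this returns
 * 64 + (15 & ~7) = 72, i.e. eight spare bytes so the context can be
 * realigned to a 16-byte boundary.
 */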

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");