// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);
void crypto_mod_put(struct crypto_alg *alg)
{
	/* Stash the module pointer: crypto_alg_put() may free @alg. */
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	/* Test larvals are the only larvals with a driver name set. */
	return larval->alg.cra_driver_name[0];
}
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}
void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
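
/*
 * Example (editor's illustrative sketch, not part of the original file):
 * what the CRYPTO_ALG_INTERNAL handling above means for callers of
 * crypto_alg_mod_lookup().
 *
 *	// Default lookup: the mask implicitly gains CRYPTO_ALG_INTERNAL,
 *	// so implementations flagged internal can never match.
 *	alg = crypto_alg_mod_lookup("aes", 0, 0);
 *
 *	// Accept an implementation that may or may not be internal:
 *	alg = crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL, 0);
 *
 *	// Require an internal implementation only:
 *	alg = crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL,
 *				    CRYPTO_ALG_INTERNAL);
 */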
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
/**
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
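
/*
 * Usage sketch (editor's example, not from the original file): a typical
 * caller of the legacy interface.  crypto_free_tfm() from <linux/crypto.h>
 * is assumed as the matching release helper.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	// ... use the transform ...
 *	crypto_free_tfm(tfm);
 */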
void *crypto_create_tfm_node(struct crypto_alg *alg,
			const struct crypto_type *frontend,
			int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);
/**
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node in which users desire to put requests, if node is
 *	  NUMA_NO_NODE, it means users have no special requirement.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
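
/*
 * Sketch (editor's illustration): how a type frontend is expected to wrap
 * this helper.  The skcipher wrapper shown here is an assumption modelled
 * on crypto/skcipher.c; crypto_alloc_tfm() is simply this function with
 * node == NUMA_NO_NODE.
 *
 *	struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
 *						      u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_skcipher_type,
 *					type, mask);
 *	}
 */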
/**
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
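
/*
 * Example (editor's sketch): probing for algorithm availability before
 * committing to an allocation, e.g. in a driver's setup path.
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 */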
void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
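
/*
 * Example (editor's sketch): the synchronous-wait pattern crypto_req_done()
 * is built for, using the DECLARE_CRYPTO_WAIT() and crypto_wait_req()
 * helpers from <linux/crypto.h>.
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */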
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
MODULE_SOFTDEP("pre: cryptomgr");